+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, CliError, Human, Config, human};
use cargo::util::important_paths::{find_root_manifest_for_wd};
},
};
- let err = try!(ops::run_benches(&root, &ops, &options.arg_args));
+ let ws = try!(Workspace::new(&root, config));
+ let err = try!(ops::run_benches(&ws, &ops, &options.arg_args));
match err {
None => Ok(None),
Some(err) => {
use std::env;
+use cargo::core::Workspace;
use cargo::ops::CompileOptions;
use cargo::ops;
use cargo::util::important_paths::{find_root_manifest_for_wd};
target_rustc_args: None,
};
- try!(ops::compile(&root, &opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::compile(&ws, &opts));
Ok(None)
}
use std::env;
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
target: options.flag_target.as_ref().map(|s| &s[..]),
release: options.flag_release,
};
- try!(ops::clean(&root, &opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::clean(&ws, &opts));
Ok(None)
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
},
};
- try!(ops::doc(&root, &doc_opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::doc(&ws, &doc_opts));
Ok(None)
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::find_root_manifest_for_wd;
options.flag_quiet,
&options.flag_color));
let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
- try!(ops::fetch(&root, config));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::fetch(&ws));
Ok(None)
}
use std::env;
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::find_root_manifest_for_wd;
&options.flag_color));
let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
- try!(ops::generate_lockfile(&root, config));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::generate_lockfile(&ws));
Ok(None)
}
-extern crate cargo;
-extern crate docopt;
-extern crate rustc_serialize;
-extern crate toml;
-
+use cargo::core::Workspace;
use cargo::ops::{output_metadata, OutputMetadataOptions, ExportInfo};
use cargo::util::important_paths::find_root_manifest_for_wd;
use cargo::util::{CliResult, Config};
let options = OutputMetadataOptions {
features: options.flag_features,
- manifest_path: &manifest,
no_default_features: options.flag_no_default_features,
no_deps: options.flag_no_deps,
version: options.flag_format_version,
};
- let result = try!(output_metadata(options, config));
+ let ws = try!(Workspace::new(&manifest, config));
+ let result = try!(output_metadata(&ws, &options));
Ok(Some(result))
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::find_root_manifest_for_wd;
options.flag_quiet,
&options.flag_color));
let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd()));
- try!(ops::package(&root, &ops::PackageOpts {
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::package(&ws, &ops::PackageOpts {
config: config,
verify: !options.flag_no_verify,
list: options.flag_list,
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
options.flag_quiet,
&options.flag_color));
let root = try!(find_root_manifest_for_wd(options.flag_manifest_path.clone(), config.cwd()));
+ let ws = try!(Workspace::new(&root, config));
let spec = options.arg_spec.as_ref().map(|s| &s[..]);
- let spec = try!(ops::pkgid(&root, spec, config));
+ let spec = try!(ops::pkgid(&ws, spec));
println!("{}", spec);
Ok(None)
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::find_root_manifest_for_wd;
} = options;
let root = try!(find_root_manifest_for_wd(flag_manifest_path.clone(), config.cwd()));
- try!(ops::publish(&root, &ops::PublishOpts {
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::publish(&ws, &ops::PublishOpts {
config: config,
token: token,
index: host,
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, CliError, Config, Human};
use cargo::util::important_paths::{find_root_manifest_for_wd};
target_rustc_args: None,
};
- match try!(ops::run(&root, &compile_opts, &options.arg_args)) {
+ let ws = try!(Workspace::new(&root, config));
+ match try!(ops::run(&ws, &compile_opts, &options.arg_args)) {
None => Ok(None),
Some(err) => {
// If we never actually spawned the process then that sounds pretty
use std::env;
+use cargo::core::Workspace;
use cargo::ops::{CompileOptions, CompileMode};
use cargo::ops;
use cargo::util::important_paths::{find_root_manifest_for_wd};
target_rustc_args: options.arg_opts.as_ref().map(|a| &a[..]),
};
- try!(ops::compile(&root, &opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::compile(&ws, &opts));
Ok(None)
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
},
};
- try!(ops::doc(&root, &doc_opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::doc(&ws, &doc_opts));
Ok(None)
}
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, CliError, Human, human, Config};
use cargo::util::important_paths::{find_root_manifest_for_wd};
},
};
- let err = try!(ops::run_tests(&root, &ops, &options.arg_args));
+ let ws = try!(Workspace::new(&root, config));
+ let err = try!(ops::run_tests(&ws, &ops, &options.arg_args));
match err {
None => Ok(None),
Some(err) => {
use std::env;
+use cargo::core::Workspace;
use cargo::ops;
use cargo::util::{CliResult, Config};
use cargo::util::important_paths::find_root_manifest_for_wd;
config: config,
};
- try!(ops::update_lockfile(&root, &update_opts));
+ let ws = try!(Workspace::new(&root, config));
+ try!(ops::update_lockfile(&ws, &update_opts));
Ok(None)
}
use semver::Version;
use rustc_serialize::{Encoder, Encodable};
-use core::{Dependency, PackageId, PackageIdSpec, Summary};
+use core::{Dependency, PackageId, PackageIdSpec, Summary, WorkspaceConfig};
use core::package_id::Metadata;
+pub enum EitherManifest {
+ Real(Manifest),
+ Virtual(VirtualManifest),
+}
+
/// Contains all the information about a package, as loaded from a Cargo.toml.
#[derive(Clone, Debug)]
pub struct Manifest {
profiles: Profiles,
publish: bool,
replace: Vec<(PackageIdSpec, Dependency)>,
+ workspace: WorkspaceConfig,
+}
+
+#[derive(Clone, Debug)]
+pub struct VirtualManifest {
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ workspace: WorkspaceConfig,
}
/// General metadata about a package which is just blindly uploaded to the
metadata: ManifestMetadata,
profiles: Profiles,
publish: bool,
- replace: Vec<(PackageIdSpec, Dependency)>) -> Manifest {
+ replace: Vec<(PackageIdSpec, Dependency)>,
+ workspace: WorkspaceConfig) -> Manifest {
Manifest {
summary: summary,
targets: targets,
profiles: profiles,
publish: publish,
replace: replace,
+ workspace: workspace,
}
}
self.links.as_ref().map(|s| &s[..])
}
+ pub fn workspace_config(&self) -> &WorkspaceConfig {
+ &self.workspace
+ }
+
pub fn add_warning(&mut self, s: String) {
self.warnings.push(s)
}
}
}
+impl VirtualManifest {
+ pub fn new(replace: Vec<(PackageIdSpec, Dependency)>,
+ workspace: WorkspaceConfig) -> VirtualManifest {
+ VirtualManifest {
+ replace: replace,
+ workspace: workspace,
+ }
+ }
+
+ pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ &self.replace
+ }
+
+ pub fn workspace_config(&self) -> &WorkspaceConfig {
+ &self.workspace
+ }
+}
+
impl Target {
fn blank() -> Target {
Target {
pub use self::dependency::{Dependency, DependencyInner};
pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles};
+pub use self::manifest::{EitherManifest, VirtualManifest};
pub use self::package::{Package, PackageSet};
pub use self::package_id::{PackageId, Metadata};
pub use self::package_id_spec::PackageIdSpec;
pub use self::shell::{Shell, MultiShell, ShellConfig, Verbosity, ColorConfig};
pub use self::source::{Source, SourceId, SourceMap, GitReference};
pub use self::summary::Summary;
+pub use self::workspace::{Workspace, WorkspaceConfig};
pub mod source;
pub mod package;
pub mod shell;
pub mod registry;
mod package_id_spec;
+mod workspace;
use regex::Regex;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
-use core::{Package, PackageId, SourceId};
+use core::{Package, PackageId, SourceId, Workspace};
use util::{CargoResult, Graph, Config};
use super::Resolve;
pub type Metadata = BTreeMap<String, String>;
impl EncodableResolve {
- pub fn to_resolve(&self, root: &Package, config: &Config)
- -> CargoResult<Resolve> {
- let mut path_deps = HashMap::new();
- try!(build_path_deps(root, &mut path_deps, config));
- let default = root.package_id().source_id();
+ pub fn to_resolve(&self, ws: &Workspace) -> CargoResult<Resolve> {
+ let path_deps = build_path_deps(ws);
+ let default = try!(ws.current()).package_id().source_id();
let mut g = Graph::new();
let mut tmp = HashMap::new();
}
}
-fn build_path_deps(root: &Package,
- map: &mut HashMap<String, SourceId>,
- config: &Config)
- -> CargoResult<()> {
- // If the root crate is *not* a path source, then we're probably in a
- // situation such as `cargo install` with a lock file from a remote
- // dependency. In that case we don't need to fixup any path dependencies (as
- // they're not actually path dependencies any more), so we ignore them.
- if !root.package_id().source_id().is_path() {
- return Ok(())
+fn build_path_deps(ws: &Workspace) -> HashMap<String, SourceId> {
+ // If a crate is *not* a path source, then we're probably in a situation
+ // such as `cargo install` with a lock file from a remote dependency. In
+ // that case we don't need to fixup any path dependencies (as they're not
+ // actually path dependencies any more), so we ignore them.
+ let members = ws.members().filter(|p| {
+ p.package_id().source_id().is_path()
+ }).collect::<Vec<_>>();
+
+ let mut ret = HashMap::new();
+ for member in members.iter() {
+ ret.insert(member.package_id().name().to_string(),
+ member.package_id().source_id().clone());
+ }
+ for member in members.iter() {
+ build(member, ws.config(), &mut ret);
}
- let deps = root.dependencies()
- .iter()
- .map(|d| d.source_id())
- .filter(|id| id.is_path())
- .filter_map(|id| id.url().to_file_path().ok())
- .map(|path| path.join("Cargo.toml"))
- .filter_map(|path| Package::for_path(&path, config).ok());
- for pkg in deps {
- let source_id = pkg.package_id().source_id();
- if map.insert(pkg.name().to_string(), source_id.clone()).is_none() {
- try!(build_path_deps(&pkg, map, config));
+ return ret;
+
+ fn build(pkg: &Package,
+ config: &Config,
+ ret: &mut HashMap<String, SourceId>) {
+ let deps = pkg.dependencies()
+ .iter()
+ .filter(|d| !ret.contains_key(d.name()))
+ .map(|d| d.source_id())
+ .filter(|id| id.is_path())
+ .filter_map(|id| id.url().to_file_path().ok())
+ .map(|path| path.join("Cargo.toml"))
+ .filter_map(|path| Package::for_path(&path, config).ok())
+ .collect::<Vec<_>>();
+ for pkg in deps {
+ ret.insert(pkg.name().to_string(),
+ pkg.package_id().source_id().clone());
+ build(&pkg, config, ret);
}
}
-
- Ok(())
}
fn to_package_id(name: &str,
}
/// Builds the list of all packages required to build the first argument.
-pub fn resolve(summary: &Summary,
- method: &Method,
+pub fn resolve(root: &PackageId,
+ summaries: &[(Summary, Method)],
replacements: &[(PackageIdSpec, Dependency)],
registry: &mut Registry) -> CargoResult<Resolve> {
- trace!("resolve; summary={}", summary.package_id());
- let summary = Rc::new(summary.clone());
-
let cx = Context {
- resolve: Resolve::new(summary.package_id().clone()),
+ resolve: Resolve::new(root.clone()),
activations: HashMap::new(),
replacements: replacements,
};
- let _p = profile::start(format!("resolving: {}", summary.package_id()));
- let cx = try!(activate_deps_loop(cx, registry, summary, method));
+ let _p = profile::start(format!("resolving: {}", root));
+ let cx = try!(activate_deps_loop(cx, registry, summaries));
try!(check_cycles(&cx));
Ok(cx.resolve)
}
/// dependency graph, cx.resolve is returned.
fn activate_deps_loop<'a>(mut cx: Context<'a>,
registry: &mut Registry,
- top: Rc<Summary>,
- top_method: &Method) -> CargoResult<Context<'a>> {
+ summaries: &[(Summary, Method)])
+ -> CargoResult<Context<'a>> {
// Note that a `BinaryHeap` is used for the remaining dependencies that need
// activation. This heap is sorted such that the "largest value" is the most
// constrained dependency, or the one with the least candidates.
// use (those with more candidates).
let mut backtrack_stack = Vec::new();
let mut remaining_deps = BinaryHeap::new();
- remaining_deps.extend(try!(activate(&mut cx, registry, None,
- Candidate { summary: top, replace: None },
- &top_method)));
+ for &(ref summary, ref method) in summaries {
+ debug!("initial activation: {}", summary.package_id());
+ let summary = Rc::new(summary.clone());
+ let candidate = Candidate { summary: summary, replace: None };
+ remaining_deps.extend(try!(activate(&mut cx, registry, None, candidate,
+ method)));
+ }
// Main resolution loop, this is the workhorse of the resolution algorithm.
//
--- /dev/null
+use std::collections::hash_map::{HashMap, Entry};
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+use std::slice;
+
+use core::{Package, VirtualManifest, EitherManifest, SourceId};
+use core::{PackageIdSpec, Dependency};
+use ops;
+use util::{Config, CargoResult};
+use util::paths;
+
+/// The core abstraction in Cargo for working with a workspace of crates.
+///
+/// A workspace is often created very early on and then threaded through all
+/// other functions. It's typically through this object that the current
+/// package is loaded and/or learned about.
+pub struct Workspace<'cfg> {
+ config: &'cfg Config,
+
+ // This path is a path to where the current cargo subcommand was invoked
+ // from. That is, this is the `--manifest-path` argument to Cargo, and
+ // points to the "main crate" that we're going to worry about.
+ current_manifest: PathBuf,
+
+ // A list of packages found in this workspace. Always includes at least the
+ // package mentioned by `current_manifest`.
+ packages: Packages<'cfg>,
+
+ // If this workspace includes more than one crate, this points to the root
+ // of the workspace. This is `None` in the case that `[workspace]` is
+ // missing, `package.workspace` is missing, and no `Cargo.toml` above
+ // `current_manifest` was found on the filesystem with `[workspace]`.
+ root_manifest: Option<PathBuf>,
+
+ // List of members in this workspace with a listing of all their manifest
+ // paths. The packages themselves can be looked up through the `packages`
+ // set above.
+ members: Vec<PathBuf>,
+}
+
+// Separate structure for tracking loaded packages (to avoid loading anything
+// twice), and this is separate to help appease the borrow checker.
+struct Packages<'cfg> {
+ config: &'cfg Config,
+ packages: HashMap<PathBuf, MaybePackage>,
+}
+
+enum MaybePackage {
+ Package(Package),
+ Virtual(VirtualManifest),
+}
+
+/// Configuration of a workspace in a manifest.
+#[derive(Debug, Clone)]
+pub enum WorkspaceConfig {
+ /// Indicates that `[workspace]` was present and the members were
+ /// optionally specified as well.
+ Root { members: Option<Vec<String>> },
+
+ /// Indicates that `[workspace]` was present and the `root` field is the
+ /// optional value of `package.workspace`, if present.
+ Member { root: Option<String> },
+}
+
+/// An iterator over the member packages of a workspace, returned by
+/// `Workspace::members`
+pub struct Members<'a, 'cfg: 'a> {
+ ws: &'a Workspace<'cfg>,
+ iter: slice::Iter<'a, PathBuf>,
+}
+
+impl<'cfg> Workspace<'cfg> {
+ /// Creates a new workspace given the target manifest pointed to by
+ /// `manifest_path`.
+ ///
+ /// This function will construct the entire workspace by determining the
+ /// root and all member packages. It will then validate the workspace
+ /// before returning it, so `Ok` is only returned for valid workspaces.
+ pub fn new(manifest_path: &Path, config: &'cfg Config)
+ -> CargoResult<Workspace<'cfg>> {
+ let mut ws = Workspace {
+ config: config,
+ current_manifest: manifest_path.to_path_buf(),
+ packages: Packages {
+ config: config,
+ packages: HashMap::new(),
+ },
+ root_manifest: None,
+ members: Vec::new(),
+ };
+ ws.root_manifest = try!(ws.find_root(manifest_path));
+ try!(ws.find_members());
+ try!(ws.validate());
+ Ok(ws)
+ }
+
+ /// Creates a "temporary workspace" from one package which only contains
+ /// that package.
+ ///
+ /// This constructor will not touch the filesystem and only creates an
+ /// in-memory workspace. That is, all configuration is ignored, it's just
+ /// intended for that one package.
+ ///
+ /// This is currently only used in niche situations like `cargo install` or
+ /// `cargo package`.
+ pub fn one(package: Package, config: &'cfg Config) -> Workspace<'cfg> {
+ let mut ws = Workspace {
+ config: config,
+ current_manifest: package.manifest_path().to_path_buf(),
+ packages: Packages {
+ config: config,
+ packages: HashMap::new(),
+ },
+ root_manifest: None,
+ members: Vec::new(),
+ };
+ {
+ let key = ws.current_manifest.parent().unwrap();
+ let package = MaybePackage::Package(package);
+ ws.packages.packages.insert(key.to_path_buf(), package);
+ ws.members.push(ws.current_manifest.clone());
+ }
+ return ws
+ }
+
+ /// Returns the current package of this workspace.
+ ///
+ /// Note that this can return an error if the current manifest is
+ /// actually a "virtual Cargo.toml", in which case an error is returned
+ /// indicating that something else should be passed.
+ pub fn current(&self) -> CargoResult<&Package> {
+ match *self.packages.get(&self.current_manifest) {
+ MaybePackage::Package(ref p) => Ok(p),
+ MaybePackage::Virtual(..) => {
+ bail!("manifest path `{}` is a virtual manifest, but this \
+ command requires running against an actual package in \
+ this workspace", self.current_manifest.display())
+ }
+ }
+ }
+
+ /// Returns the `Config` this workspace is associated with.
+ pub fn config(&self) -> &'cfg Config {
+ self.config
+ }
+
+ /// Returns the root path of this workspace.
+ ///
+ /// That is, this returns the path of the directory containing the
+ /// `Cargo.toml` which is the root of this workspace.
+ pub fn root(&self) -> &Path {
+ match self.root_manifest {
+ Some(ref p) => p,
+ None => &self.current_manifest
+ }.parent().unwrap()
+ }
+
+ /// Returns the root [replace] section of this workspace.
+ ///
+ /// This may be from a virtual crate or an actual crate.
+ pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] {
+ let path = match self.root_manifest {
+ Some(ref p) => p,
+ None => &self.current_manifest,
+ };
+ match *self.packages.get(path) {
+ MaybePackage::Package(ref p) => p.manifest().replace(),
+ MaybePackage::Virtual(ref v) => v.replace(),
+ }
+ }
+
+ /// Returns an iterator over all packages in this workspace
+ pub fn members<'a>(&'a self) -> Members<'a, 'cfg> {
+ Members {
+ ws: self,
+ iter: self.members.iter(),
+ }
+ }
+
+ /// Finds the root of a workspace for the crate whose manifest is located
+ /// at `manifest_path`.
+ ///
+ /// This will parse the `Cargo.toml` at `manifest_path` and then interpret
+ /// the workspace configuration, optionally walking up the filesystem
+ /// looking for other workspace roots.
+ ///
+ /// Returns an error if `manifest_path` isn't actually a valid manifest or
+ /// if some other transient error happens.
+ fn find_root(&mut self, manifest_path: &Path)
+ -> CargoResult<Option<PathBuf>> {
+ {
+ let current = try!(self.packages.load(&manifest_path));
+ match *current.workspace_config() {
+ WorkspaceConfig::Root { .. } => {
+ debug!("find_root - is root {}", manifest_path.display());
+ return Ok(Some(manifest_path.to_path_buf()))
+ }
+ WorkspaceConfig::Member { root: Some(ref path_to_root) } => {
+ let path = manifest_path.parent().unwrap()
+ .join(path_to_root)
+ .join("Cargo.toml");
+ debug!("find_root - pointer {}", path.display());
+ return Ok(Some(paths::normalize_path(&path)))
+ }
+ WorkspaceConfig::Member { root: None } => {}
+ }
+ }
+
+ let mut cur = manifest_path.parent().and_then(|p| p.parent());
+ while let Some(path) = cur {
+ let manifest = path.join("Cargo.toml");
+ debug!("find_root - trying {}", manifest.display());
+ if let Ok(pkg) = self.packages.load(&manifest) {
+ match *pkg.workspace_config() {
+ WorkspaceConfig::Root { .. } => {
+ debug!("find_root - found");
+ return Ok(Some(manifest))
+ }
+ WorkspaceConfig::Member { .. } => {}
+ }
+ }
+ cur = path.parent();
+ }
+
+ Ok(None)
+ }
+
+ /// After the root of a workspace has been located, probes for all members
+ /// of a workspace.
+ ///
+ /// If the `workspace.members` configuration is present, then this just
+ /// verifies that those are all valid packages to point to. Otherwise, this
+ /// will transitively follow all `path` dependencies looking for members of
+ /// the workspace.
+ fn find_members(&mut self) -> CargoResult<()> {
+ let root_manifest = match self.root_manifest {
+ Some(ref path) => path.clone(),
+ None => {
+ debug!("find_members - only me as a member");
+ self.members.push(self.current_manifest.clone());
+ return Ok(())
+ }
+ };
+ let members = {
+ let root = try!(self.packages.load(&root_manifest));
+ match *root.workspace_config() {
+ WorkspaceConfig::Root { ref members } => members.clone(),
+ _ => bail!("root of a workspace inferred but wasn't a root: {}",
+ root_manifest.display()),
+ }
+ };
+
+ if let Some(list) = members {
+ let root = root_manifest.parent().unwrap();
+ for path in list {
+ let manifest_path = root.join(path).join("Cargo.toml");
+ try!(self.find_path_deps(&manifest_path));
+ }
+ }
+
+ self.find_path_deps(&root_manifest)
+ }
+
+ fn find_path_deps(&mut self, manifest_path: &Path) -> CargoResult<()> {
+ if self.members.iter().any(|p| p == manifest_path) {
+ return Ok(())
+ }
+
+ debug!("find_members - {}", manifest_path.display());
+ self.members.push(manifest_path.to_path_buf());
+
+ let candidates = {
+ let pkg = match *try!(self.packages.load(manifest_path)) {
+ MaybePackage::Package(ref p) => p,
+ MaybePackage::Virtual(_) => return Ok(()),
+ };
+ pkg.dependencies()
+ .iter()
+ .map(|d| d.source_id())
+ .filter(|d| d.is_path())
+ .filter_map(|d| d.url().to_file_path().ok())
+ .map(|p| p.join("Cargo.toml"))
+ .collect::<Vec<_>>()
+ };
+ for candidate in candidates {
+ try!(self.find_path_deps(&candidate));
+ }
+ Ok(())
+ }
+
+ /// Validates a workspace, ensuring that a number of invariants are upheld:
+ ///
+ /// 1. A workspace only has one root.
+ /// 2. All workspace members agree on this one root as the root.
+ /// 3. The current crate is a member of this workspace.
+ fn validate(&mut self) -> CargoResult<()> {
+ if self.root_manifest.is_none() {
+ return Ok(())
+ }
+
+ let mut roots = Vec::new();
+ {
+ let mut names = BTreeMap::new();
+ for member in self.members.iter() {
+ let package = self.packages.get(member);
+ match *package.workspace_config() {
+ WorkspaceConfig::Root { .. } => {
+ roots.push(member.parent().unwrap().to_path_buf());
+ }
+ WorkspaceConfig::Member { .. } => {}
+ }
+ let name = match *package {
+ MaybePackage::Package(ref p) => p.name(),
+ MaybePackage::Virtual(_) => continue,
+ };
+ if let Some(prev) = names.insert(name, member) {
+ bail!("two packages named `{}` in this workspace:\n\
+ - {}\n\
+ - {}", name, prev.display(), member.display());
+ }
+ }
+ }
+
+ match roots.len() {
+ 0 => {
+ bail!("`package.workspace` configuration points to a crate \
+ which is not configured with [workspace]: \n\
+ configuration at: {}\n\
+ points to: {}",
+ self.current_manifest.display(),
+ self.root_manifest.as_ref().unwrap().display())
+ }
+ 1 => {}
+ _ => {
+ bail!("multiple workspace roots found in the same workspace:\n{}",
+ roots.iter()
+ .map(|r| format!(" {}", r.display()))
+ .collect::<Vec<_>>()
+ .join("\n"));
+ }
+ }
+
+ for member in self.members.clone() {
+ let root = try!(self.find_root(&member));
+ if root == self.root_manifest {
+ continue
+ }
+
+ match root {
+ Some(root) => {
+ bail!("package `{}` is a member of the wrong workspace\n\
+ expected: {}\n\
+ actual: {}",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display(),
+ root.display());
+ }
+ None => {
+ bail!("workspace member `{}` is not hierarchically below \
+ the workspace root `{}`",
+ member.display(),
+ self.root_manifest.as_ref().unwrap().display());
+ }
+ }
+ }
+
+ if !self.members.contains(&self.current_manifest) {
+ let root = self.root_manifest.as_ref().unwrap();
+ let root_dir = root.parent().unwrap();
+ let current_dir = self.current_manifest.parent().unwrap();
+ let root_pkg = self.packages.get(root);
+
+ let members_msg = match current_dir.strip_prefix(root_dir) {
+ Ok(rel) => {
+ format!("this may be fixable by adding `{}` to the \
+ `workspace.members` array of the manifest \
+ located at: {}",
+ rel.display(),
+ root.display())
+ }
+ Err(_) => {
+ format!("this may be fixable by adding a member to \
+ the `workspace.members` array of the \
+ manifest located at: {}", root.display())
+ }
+ };
+ let extra = match *root_pkg {
+ MaybePackage::Virtual(_) => members_msg,
+ MaybePackage::Package(ref p) => {
+ let members = match *p.manifest().workspace_config() {
+ WorkspaceConfig::Root { ref members } => members,
+ WorkspaceConfig::Member { .. } => unreachable!(),
+ };
+ if members.is_none() {
+ format!("this may be fixable by ensuring that this \
+ crate is depended on by the workspace \
+ root: {}", root.display())
+ } else {
+ members_msg
+ }
+ }
+ };
+ bail!("current package believes it's in a workspace when it's not:\n\
+ current: {}\n\
+ workspace: {}\n\n{}",
+ self.current_manifest.display(),
+ root.display(),
+ extra);
+ }
+
+ Ok(())
+ }
+}
+
+impl<'cfg> Packages<'cfg> {
+ fn get(&self, manifest_path: &Path) -> &MaybePackage {
+ &self.packages[manifest_path.parent().unwrap()]
+ }
+
+ fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
+ let key = manifest_path.parent().unwrap();
+ match self.packages.entry(key.to_path_buf()) {
+ Entry::Occupied(e) => Ok(e.into_mut()),
+ Entry::Vacant(v) => {
+ let source_id = try!(SourceId::for_path(key));
+ let pair = try!(ops::read_manifest(&manifest_path, &source_id,
+ self.config));
+ let (manifest, _nested_paths) = pair;
+ Ok(v.insert(match manifest {
+ EitherManifest::Real(manifest) => {
+ MaybePackage::Package(Package::new(manifest,
+ manifest_path))
+ }
+ EitherManifest::Virtual(v) => {
+ MaybePackage::Virtual(v)
+ }
+ }))
+ }
+ }
+ }
+}
+
+impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
+ type Item = &'a Package;
+
+ fn next(&mut self) -> Option<&'a Package> {
+ loop {
+ let next = self.iter.next().map(|path| {
+ self.ws.packages.get(path)
+ });
+ match next {
+ Some(&MaybePackage::Package(ref p)) => return Some(p),
+ Some(&MaybePackage::Virtual(_)) => {}
+ None => return None,
+ }
+ }
+ }
+}
+
+impl MaybePackage {
+ fn workspace_config(&self) -> &WorkspaceConfig {
+ match *self {
+ MaybePackage::Virtual(ref v) => v.workspace_config(),
+ MaybePackage::Package(ref v) => v.manifest().workspace_config(),
+ }
+ }
+}
use std::fs;
use std::path::Path;
-use core::{Package, Profiles};
+use core::{Profiles, Workspace};
use core::registry::PackageRegistry;
use util::{CargoResult, human, ChainError, Config};
use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
}
/// Cleans the project from build artifacts.
-pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> {
- let root = try!(Package::for_path(manifest_path, opts.config));
- let target_dir = opts.config.target_dir(&root);
+pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> {
+ let target_dir = opts.config.target_dir(&ws);
// If we have a spec, then we need to delete some packages, otherwise, just
// remove the whole target directory and be done with it!
}
let mut registry = PackageRegistry::new(opts.config);
- let resolve = try!(ops::resolve_pkg(&mut registry, &root, opts.config));
+ let resolve = try!(ops::resolve_ws(&mut registry, ws));
let packages = ops::get_resolved_packages(&resolve, registry);
let dest = if opts.release {"release"} else {"debug"};
- let host_layout = try!(Layout::new(opts.config, &root, None, dest));
+ let host_layout = try!(Layout::new(ws, None, dest));
let target_layout = match opts.target {
- Some(target) => {
- Some(try!(Layout::new(opts.config, &root, Some(target), dest)))
- }
+ Some(target) => Some(try!(Layout::new(ws, Some(target), dest))),
None => None,
};
+ let profiles = try!(ws.current()).manifest().profiles();
let mut cx = try!(Context::new(&resolve, &packages, opts.config,
host_layout, target_layout,
BuildConfig::default(),
- root.manifest().profiles()));
+ profiles));
let mut units = Vec::new();
for spec in opts.spec {
let Profiles {
ref release, ref dev, ref test, ref bench, ref doc,
ref custom_build, ref test_deps, ref bench_deps,
- } = *root.manifest().profiles();
+ } = *profiles;
let profiles = [release, dev, test, bench, doc, custom_build,
test_deps, bench_deps];
for profile in profiles.iter() {
//!
use std::collections::HashMap;
-use std::default::Default;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
use std::sync::Arc;
use core::registry::PackageRegistry;
use core::{Source, SourceId, PackageSet, Package, Target};
-use core::{Profile, TargetKind, Profiles};
+use core::{Profile, TargetKind, Profiles, Workspace};
use core::resolver::{Method, Resolve};
use ops::{self, BuildOutput, ExecEngine};
use sources::PathSource;
}
}
-pub fn compile<'a>(manifest_path: &Path,
- options: &CompileOptions<'a>)
+pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
- debug!("compile; manifest-path={}", manifest_path.display());
-
- let package = try!(Package::for_path(manifest_path, options.config));
- debug!("loaded package; package={}", package);
-
- for key in package.manifest().warnings().iter() {
+ for key in try!(ws.current()).manifest().warnings().iter() {
try!(options.config.shell().warn(key))
}
- compile_pkg(&package, None, options)
+ compile_ws(ws, None, options)
}
-pub fn resolve_dependencies<'a>(root_package: &Package,
- config: &'a Config,
+pub fn resolve_dependencies<'a>(ws: &Workspace<'a>,
source: Option<Box<Source + 'a>>,
features: Vec<String>,
no_default_features: bool)
-> CargoResult<(PackageSet<'a>, Resolve)> {
- let mut registry = PackageRegistry::new(config);
+ let mut registry = PackageRegistry::new(ws.config());
if let Some(source) = source {
- registry.add_preloaded(root_package.package_id().source_id(), source);
+ registry.add_preloaded(try!(ws.current()).package_id().source_id(),
+ source);
}
// First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
- let resolve = try!(ops::resolve_pkg(&mut registry, root_package, config));
+ let resolve = try!(ops::resolve_ws(&mut registry, ws));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
// overrides, etc.
let _p = profile::start("resolving w/ overrides...");
- try!(add_overrides(&mut registry, root_package.root(), config));
+ try!(add_overrides(&mut registry, ws));
let method = Method::Required{
dev_deps: true, // TODO: remove this option?
};
let resolved_with_overrides =
- try!(ops::resolve_with_previous(&mut registry, root_package,
+ try!(ops::resolve_with_previous(&mut registry, ws,
method, Some(&resolve), None));
let packages = ops::get_resolved_packages(&resolved_with_overrides,
Ok((packages, resolved_with_overrides))
}
-pub fn compile_pkg<'a>(root_package: &Package,
- source: Option<Box<Source + 'a>>,
- options: &CompileOptions<'a>)
- -> CargoResult<ops::Compilation<'a>> {
+pub fn compile_ws<'a>(ws: &Workspace<'a>,
+ source: Option<Box<Source + 'a>>,
+ options: &CompileOptions<'a>)
+ -> CargoResult<ops::Compilation<'a>> {
+ let root_package = try!(ws.current());
let CompileOptions { config, jobs, target, spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
}
let (packages, resolve_with_overrides) = {
- try!(resolve_dependencies(root_package, config, source, features,
- no_default_features))
+ try!(resolve_dependencies(ws, source, features, no_default_features))
};
let mut pkgids = Vec::new();
build_config.doc_all = deps;
}
- try!(ops::compile_targets(&package_targets,
+ try!(ops::compile_targets(ws,
+ &package_targets,
&packages,
&resolve_with_overrides,
config,
build_config,
- root_package.manifest().profiles(),
- ))
+ profiles))
};
ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
/// Read the `paths` configuration variable to discover all path overrides that
/// have been configured.
fn add_overrides<'a>(registry: &mut PackageRegistry<'a>,
- cur_path: &Path,
- config: &'a Config) -> CargoResult<()> {
- let paths = match try!(config.get_list("paths")) {
+ ws: &Workspace<'a>) -> CargoResult<()> {
+ let paths = match try!(ws.config().get_list("paths")) {
Some(list) => list,
None => return Ok(())
};
+ let current = try!(ws.current());
let paths = paths.val.iter().map(|&(ref s, ref p)| {
// The path listed next to the string is the config file in which the
// key was located, so we want to pop off the `.cargo/config` component
}).filter(|&(ref p, _)| {
// Make sure we don't override the local package, even if it's in the
// list of override paths.
- cur_path != &**p
+ current.root() != &**p
});
for (path, definition) in paths {
let id = try!(SourceId::for_path(&path));
- let mut source = PathSource::new_recursive(&path, &id, config);
+ let mut source = PathSource::new_recursive(&path, &id, ws.config());
try!(source.update().chain_error(|| {
human(format!("failed to update path override `{}` \
(defined in `{}`)", path.display(),
use std::path::Path;
use std::process::Command;
-use core::{Package, PackageIdSpec};
+use core::{PackageIdSpec, Workspace};
use ops;
use util::CargoResult;
pub compile_opts: ops::CompileOptions<'a>,
}
-pub fn doc(manifest_path: &Path,
+pub fn doc(ws: &Workspace,
options: &DocOptions) -> CargoResult<()> {
- let package = try!(Package::for_path(manifest_path, options.compile_opts.config));
+ let package = try!(ws.current());
let mut lib_names = HashSet::new();
let mut bin_names = HashSet::new();
}
}
- try!(ops::compile(manifest_path, &options.compile_opts));
+ try!(ops::compile(ws, &options.compile_opts));
if options.open_result {
let name = if options.compile_opts.spec.len() > 1 {
// Don't bother locking here as if this is getting deleted there's
// nothing we can do about it and otherwise if it's getting overwritten
// then that's also ok!
- let target_dir = options.compile_opts.config.target_dir(&package);
+ let target_dir = options.compile_opts.config.target_dir(ws);
let path = target_dir.join("doc").join(&name).join("index.html");
let path = path.into_path_unlocked();
if fs::metadata(&path).is_ok() {
-use std::path::Path;
-
use core::registry::PackageRegistry;
-use core::{Package, PackageId, Resolve, PackageSet};
+use core::{PackageId, Resolve, PackageSet, Workspace};
use ops;
-use util::{CargoResult, Config};
+use util::CargoResult;
/// Executes `cargo fetch`.
-pub fn fetch<'a>(manifest_path: &Path,
- config: &'a Config)
- -> CargoResult<(Resolve, PackageSet<'a>)> {
- let package = try!(Package::for_path(manifest_path, config));
- let mut registry = PackageRegistry::new(config);
- let resolve = try!(ops::resolve_pkg(&mut registry, &package, config));
+pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> {
+ let mut registry = PackageRegistry::new(ws.config());
+ let resolve = try!(ops::resolve_ws(&mut registry, ws));
let packages = get_resolved_packages(&resolve, registry);
for id in resolve.iter() {
try!(packages.get(id));
use std::collections::{BTreeMap, HashSet};
-use std::path::Path;
use core::PackageId;
use core::registry::PackageRegistry;
-use core::{Resolve, SourceId, Package};
+use core::{Resolve, SourceId, Workspace};
use core::resolver::Method;
use ops;
use util::config::Config;
pub aggressive: bool,
}
-pub fn generate_lockfile(manifest_path: &Path, config: &Config)
- -> CargoResult<()> {
- let package = try!(Package::for_path(manifest_path, config));
- let mut registry = PackageRegistry::new(config);
- let resolve = try!(ops::resolve_with_previous(&mut registry, &package,
+pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> {
+ let mut registry = PackageRegistry::new(ws.config());
+ let resolve = try!(ops::resolve_with_previous(&mut registry, ws,
Method::Everything,
None, None));
- try!(ops::write_pkg_lockfile(&package, &resolve, config));
+ try!(ops::write_pkg_lockfile(ws, &resolve));
Ok(())
}
-pub fn update_lockfile(manifest_path: &Path,
- opts: &UpdateOptions) -> CargoResult<()> {
+pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions)
+ -> CargoResult<()> {
if opts.aggressive && opts.precise.is_some() {
bail!("cannot specify both aggressive and precise simultaneously")
}
- let package = try!(Package::for_path(manifest_path, opts.config));
-
- let previous_resolve = match try!(ops::load_pkg_lockfile(&package, opts.config)) {
+ let previous_resolve = match try!(ops::load_pkg_lockfile(ws)) {
Some(resolve) => resolve,
- None => return generate_lockfile(manifest_path, opts.config),
+ None => return generate_lockfile(ws),
};
let mut registry = PackageRegistry::new(opts.config);
let mut to_avoid = HashSet::new();
}
let resolve = try!(ops::resolve_with_previous(&mut registry,
- &package,
+ ws,
Method::Everything,
Some(&previous_resolve),
Some(&to_avoid)));
}
}
- try!(ops::write_pkg_lockfile(&package, &resolve, opts.config));
+ try!(ops::write_pkg_lockfile(ws, &resolve));
return Ok(());
fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
use toml;
use core::{SourceId, Source, Package, Dependency, PackageIdSpec};
-use core::PackageId;
+use core::{PackageId, Workspace};
use ops::{self, CompileFilter};
use sources::{GitSource, PathSource, RegistrySource};
use util::{CargoResult, ChainError, Config, human, internal};
crates.io, or use --path or --git to \
specify alternate source"))))
};
+ let ws = Workspace::one(pkg, config);
+ let pkg = try!(ws.current());
// Preflight checks to check up front whether we'll overwrite something.
// We have to check this again afterwards, but may as well avoid building
let metadata = try!(metadata(config, &root));
let list = try!(read_crate_list(metadata.file()));
let dst = metadata.parent().join("bin");
- try!(check_overwrites(&dst, &pkg, &opts.filter, &list, force));
+ try!(check_overwrites(&dst, pkg, &opts.filter, &list, force));
}
let mut td_opt = None;
let target_dir = if source_id.is_path() {
- config.target_dir(&pkg)
+ config.target_dir(&ws)
} else {
if let Ok(td) = TempDir::new("cargo-install") {
let p = td.path().to_owned();
}
};
config.set_target_dir(target_dir.clone());
- let compile = try!(ops::compile_pkg(&pkg, Some(source), opts).chain_error(|| {
+ let compile = try!(ops::compile_ws(&ws, Some(source), opts).chain_error(|| {
if let Some(td) = td_opt.take() {
// preserve the temporary directory, so the user can inspect it
td.into_path();
let metadata = try!(metadata(config, &root));
let mut list = try!(read_crate_list(metadata.file()));
let dst = metadata.parent().join("bin");
- let duplicates = try!(check_overwrites(&dst, &pkg, &opts.filter, &list, force));
+ let duplicates = try!(check_overwrites(&dst, pkg, &opts.filter,
+ &list, force));
try!(fs::create_dir_all(&dst));
use term::color::BLACK;
+use core::Workspace;
use util::{GitRepo, HgRepo, CargoResult, human, ChainError, internal};
use util::{Config, paths};
};
if !fs::metadata(&path_of_source_file).map(|x| x.is_file()).unwrap_or(false) {
- return paths::write(&path_of_source_file, default_file_content)
+ try!(paths::write(&path_of_source_file, default_file_content));
}
}
+ if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) {
+ let msg = format!("compiling this new crate may not work due to invalid \
+ workspace configuration\n\n{}", e);
+ try!(config.shell().warn(msg));
+ }
+
Ok(())
}
-use std::path::Path;
-
use rustc_serialize::{Encodable, Encoder};
use core::resolver::Resolve;
-use core::{Package, PackageId, PackageSet};
+use core::{Package, PackageId, Workspace};
use ops;
-use util::config::Config;
use util::CargoResult;
const VERSION: u32 = 1;
-pub struct OutputMetadataOptions<'a> {
+pub struct OutputMetadataOptions {
pub features: Vec<String>,
- pub manifest_path: &'a Path,
pub no_default_features: bool,
pub no_deps: bool,
pub version: u32,
/// Loads the manifest, resolves the dependencies of the project to the concrete
/// used versions - considering overrides - and writes all dependencies in a JSON
/// format to stdout.
-pub fn output_metadata(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
+pub fn output_metadata(ws: &Workspace,
+ opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
if opt.version != VERSION {
bail!("metadata version {} not supported, only {} is currently supported",
opt.version, VERSION);
}
if opt.no_deps {
- metadata_no_deps(opt, config)
+ metadata_no_deps(ws, opt)
} else {
- metadata_full(opt, config)
+ metadata_full(ws, opt)
}
}
-fn metadata_no_deps(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
- let root = try!(Package::for_path(opt.manifest_path, config));
+fn metadata_no_deps(ws: &Workspace,
+ _opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
Ok(ExportInfo {
- packages: vec![root],
+ packages: vec![try!(ws.current()).clone()],
resolve: None,
version: VERSION,
})
}
-fn metadata_full(opt: OutputMetadataOptions, config: &Config) -> CargoResult<ExportInfo> {
- let deps = try!(resolve_dependencies(opt.manifest_path,
- config,
- opt.features,
- opt.no_default_features));
+fn metadata_full(ws: &Workspace,
+ opt: &OutputMetadataOptions) -> CargoResult<ExportInfo> {
+ let deps = try!(ops::resolve_dependencies(ws,
+ None,
+ opt.features.clone(),
+ opt.no_default_features));
let (packages, resolve) = deps;
let packages = try!(packages.package_ids()
encodable.encode(s)
}
}
-
-/// Loads the manifest and resolves the dependencies of the project to the
-/// concrete used versions. Afterwards available overrides of dependencies are applied.
-fn resolve_dependencies<'a>(manifest: &Path,
- config: &'a Config,
- features: Vec<String>,
- no_default_features: bool)
- -> CargoResult<(PackageSet<'a>, Resolve)> {
- let package = try!(Package::for_path(manifest, config));
- ops::resolve_dependencies(&package,
- config,
- None,
- features,
- no_default_features)
-}
use git2;
use tar::{Archive, Builder, Header};
-use core::{SourceId, Package, PackageId};
+use core::{SourceId, Package, PackageId, Workspace, Source};
use sources::PathSource;
use util::{self, CargoResult, human, internal, ChainError, Config, FileLock};
use ops;
pub verify: bool,
}
-pub fn package(manifest_path: &Path,
+pub fn package(ws: &Workspace,
opts: &PackageOpts) -> CargoResult<Option<FileLock>> {
- let config = opts.config;
- let path = manifest_path.parent().unwrap();
- let id = try!(SourceId::for_path(path));
- let mut src = PathSource::new(path, &id, config);
- let pkg = try!(src.root_package());
+ let pkg = try!(ws.current());
+ let config = ws.config();
+ let mut src = PathSource::new(pkg.root(),
+ pkg.package_id().source_id(),
+ config);
+ try!(src.update());
if opts.check_metadata {
- try!(check_metadata(&pkg, config));
+ try!(check_metadata(pkg, config));
}
if opts.list {
}
let filename = format!("{}-{}.crate", pkg.name(), pkg.version());
- let dir = config.target_dir(&pkg).join("package");
+ let dir = config.target_dir(ws).join("package");
let mut dst = match dir.open_ro(&filename, config, "packaged crate") {
Ok(f) => return Ok(Some(f)),
Err(..) => {
// it exists.
try!(config.shell().status("Packaging", pkg.package_id().to_string()));
try!(dst.file().set_len(0));
- try!(tar(&pkg, &src, config, dst.file(), &filename).chain_error(|| {
+ try!(tar(ws, &src, dst.file(), &filename).chain_error(|| {
human("failed to prepare local package for uploading")
}));
if opts.verify {
try!(dst.seek(SeekFrom::Start(0)));
- try!(run_verify(config, &pkg, dst.file()).chain_error(|| {
+ try!(run_verify(ws, dst.file()).chain_error(|| {
human("failed to verify package tarball")
}))
}
}
}
-fn tar(pkg: &Package,
+fn tar(ws: &Workspace,
src: &PathSource,
- config: &Config,
dst: &File,
filename: &str) -> CargoResult<()> {
// Prepare the encoder and its header
// Put all package files into a compressed archive
let mut ar = Builder::new(encoder);
+ let pkg = try!(ws.current());
+ let config = ws.config();
let root = pkg.root();
for file in try!(src.list_files(pkg)).iter() {
let relative = util::without_prefix(&file, &root).unwrap();
Ok(())
}
-fn run_verify(config: &Config,
- pkg: &Package,
- tar: &File)
- -> CargoResult<()> {
+fn run_verify(ws: &Workspace, tar: &File) -> CargoResult<()> {
+ let config = ws.config();
+ let pkg = try!(ws.current());
+
try!(config.shell().status("Verifying", pkg));
let f = try!(GzDecoder::new(tar));
let new_pkg = Package::new(new_manifest, &manifest_path);
// Now that we've rewritten all our path dependencies, compile it!
- try!(ops::compile_pkg(&new_pkg, None, &ops::CompileOptions {
+ let ws = Workspace::one(new_pkg, config);
+ try!(ops::compile_ws(&ws, None, &ops::CompileOptions {
config: config,
jobs: None,
target: None,
-use std::path::Path;
-
use ops;
-use core::{PackageIdSpec, Package};
-use util::{CargoResult, Config};
+use core::{PackageIdSpec, Workspace};
+use util::CargoResult;
-pub fn pkgid(manifest_path: &Path,
- spec: Option<&str>,
- config: &Config) -> CargoResult<PackageIdSpec> {
- let package = try!(Package::for_path(manifest_path, config));
- let resolve = match try!(ops::load_pkg_lockfile(&package, config)) {
+pub fn pkgid(ws: &Workspace, spec: Option<&str>) -> CargoResult<PackageIdSpec> {
+ let resolve = match try!(ops::load_pkg_lockfile(ws)) {
Some(resolve) => resolve,
None => bail!("a Cargo.lock must exist for this command"),
};
let pkgid = match spec {
Some(spec) => try!(PackageIdSpec::query_str(spec, resolve.iter())),
- None => package.package_id(),
+ None => try!(ws.current()).package_id(),
};
Ok(PackageIdSpec::from_package_id(pkgid))
}
use std::io;
use std::path::{Path, PathBuf};
-use core::{Package, Manifest, SourceId, PackageId};
+use core::{Package, SourceId, PackageId, EitherManifest};
use util::{self, paths, CargoResult, human, Config, ChainError};
use util::important_paths::find_project_manifest_exact;
use util::toml::Layout;
-pub fn read_manifest(contents: &[u8], layout: Layout, source_id: &SourceId,
- config: &Config)
- -> CargoResult<(Manifest, Vec<PathBuf>)> {
+pub fn read_manifest(path: &Path, source_id: &SourceId, config: &Config)
+ -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
+ trace!("read_manifest; path={}; source-id={}", path.display(), source_id);
+ let contents = try!(paths::read(path));
+
+ let layout = Layout::from_project_path(path.parent().unwrap());
let root = layout.root.clone();
- util::toml::to_manifest(contents, source_id, layout, config).chain_error(|| {
+ util::toml::to_manifest(&contents, source_id, layout, config).chain_error(|| {
human(format!("failed to parse manifest at `{}`",
root.join("Cargo.toml").display()))
})
pub fn read_package(path: &Path, source_id: &SourceId, config: &Config)
-> CargoResult<(Package, Vec<PathBuf>)> {
trace!("read_package; path={}; source-id={}", path.display(), source_id);
- let data = try!(paths::read(path));
-
- let layout = Layout::from_project_path(path.parent().unwrap());
- let (manifest, nested) =
- try!(read_manifest(data.as_bytes(), layout, source_id, config));
+ let (manifest, nested) = try!(read_manifest(path, source_id, config));
+ let manifest = match manifest {
+ EitherManifest::Real(manifest) => manifest,
+ EitherManifest::Virtual(..) => {
+ bail!("found a virtual manifest at `{}` instead of a package \
+ manifest", path.display())
+ }
+ };
Ok((Package::new(manifest, path), nested))
}
use ops::{self, CompileFilter};
use util::{self, CargoResult, process, ProcessError};
-use core::Package;
+use core::Workspace;
-pub fn run(manifest_path: &Path,
+pub fn run(ws: &Workspace,
options: &ops::CompileOptions,
args: &[String]) -> CargoResult<Option<ProcessError>> {
- let config = options.config;
- let root = try!(Package::for_path(manifest_path, config));
+ let config = ws.config();
+ let root = try!(ws.current());
let mut bins = root.manifest().targets().iter().filter(|a| {
!a.is_lib() && !a.is_custom_build() && match options.filter {
}
}
- let compile = try!(ops::compile(manifest_path, options));
+ let compile = try!(ops::compile(ws, options));
let exe = &compile.binaries[0];
let exe = match util::without_prefix(&exe, config.cwd()) {
Some(path) if path.file_name() == Some(path.as_os_str())
use std::io;
use std::path::{PathBuf, Path};
-use core::{Package, Target};
+use core::{Package, Target, Workspace};
use util::{Config, FileLock, CargoResult, Filesystem};
use util::hex::short_hash;
}
impl Layout {
- pub fn new(config: &Config,
- pkg: &Package,
+ pub fn new(ws: &Workspace,
triple: Option<&str>,
dest: &str) -> CargoResult<Layout> {
- let mut path = config.target_dir(pkg);
+ let mut path = ws.config().target_dir(ws);
// Flexible target specifications often point at filenames, so interpret
// the target triple as a Path and then just use the file stem as the
// component for the directory name.
path.push(Path::new(triple).file_stem().unwrap());
}
path.push(dest);
- Layout::at(config, path)
+ Layout::at(ws.config(), path)
}
pub fn at(config: &Config, root: Filesystem) -> CargoResult<Layout> {
use std::sync::Arc;
use core::{Package, PackageId, PackageSet, Target, Resolve};
-use core::{Profile, Profiles};
+use core::{Profile, Profiles, Workspace};
use core::shell::ColorConfig;
use util::{self, CargoResult, human};
use util::{Config, internal, ChainError, profile, join_paths};
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
-pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>,
+pub fn compile_targets<'a, 'cfg: 'a>(ws: &Workspace<'cfg>,
+ pkg_targets: &'a PackagesToBuild<'a>,
packages: &'a PackageSet<'cfg>,
resolve: &'a Resolve,
config: &'cfg Config,
let dest = if build_config.release {"release"} else {"debug"};
let root = try!(packages.get(resolve.root()));
- let host_layout = try!(Layout::new(config, root, None, &dest));
+ let host_layout = try!(Layout::new(ws, None, &dest));
let target_layout = match build_config.requested_target.as_ref() {
Some(target) => {
- Some(try!(layout::Layout::new(config, root, Some(&target), &dest)))
+ Some(try!(layout::Layout::new(ws, Some(&target), &dest)))
}
None => None,
};
use std::ffi::{OsString, OsStr};
-use std::path::Path;
use ops::{self, ExecEngine, ProcessEngine, Compilation};
use util::{self, CargoResult, CargoTestError, ProcessError};
+use core::Workspace;
pub struct TestOptions<'a> {
pub compile_opts: ops::CompileOptions<'a>,
pub only_doc: bool,
}
-pub fn run_tests(manifest_path: &Path,
+pub fn run_tests(ws: &Workspace,
options: &TestOptions,
test_args: &[String]) -> CargoResult<Option<CargoTestError>> {
- let compilation = try!(compile_tests(manifest_path, options));
+ let compilation = try!(compile_tests(ws, options));
if options.no_run {
return Ok(None)
}
}
-pub fn run_benches(manifest_path: &Path,
+pub fn run_benches(ws: &Workspace,
options: &TestOptions,
args: &[String]) -> CargoResult<Option<CargoTestError>> {
let mut args = args.to_vec();
args.push("--bench".to_string());
- let compilation = try!(compile_tests(manifest_path, options));
+ let compilation = try!(compile_tests(ws, options));
if options.no_run {
return Ok(None)
}
}
-fn compile_tests<'a>(manifest_path: &Path,
+fn compile_tests<'a>(ws: &Workspace<'a>,
options: &TestOptions<'a>)
-> CargoResult<Compilation<'a>> {
- let mut compilation = try!(ops::compile(manifest_path,
- &options.compile_opts));
+ let mut compilation = try!(ops::compile(ws, &options.compile_opts));
compilation.tests.sort_by(|a, b| {
(a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1))
});
use rustc_serialize::{Encodable, Decodable};
use toml::{self, Encoder, Value};
-use core::{Resolve, resolver, Package};
-use util::{CargoResult, ChainError, human, Config, Filesystem};
+use core::{Resolve, resolver, Workspace};
+use util::{CargoResult, ChainError, human, Filesystem};
use util::toml as cargo_toml;
-pub fn load_pkg_lockfile(pkg: &Package, config: &Config)
- -> CargoResult<Option<Resolve>> {
- if !pkg.root().join("Cargo.lock").exists() {
+pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult<Option<Resolve>> {
+ if !ws.root().join("Cargo.lock").exists() {
return Ok(None)
}
- let root = Filesystem::new(pkg.root().to_path_buf());
- let mut f = try!(root.open_ro("Cargo.lock", config, "Cargo.lock file"));
+ let root = Filesystem::new(ws.root().to_path_buf());
+ let mut f = try!(root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"));
let mut s = String::new();
try!(f.read_to_string(&mut s).chain_error(|| {
}));
(|| {
- let table = toml::Value::Table(try!(cargo_toml::parse(&s, f.path(), config)));
+ let table = try!(cargo_toml::parse(&s, f.path(), ws.config()));
+ let table = toml::Value::Table(table);
let mut d = toml::Decoder::new(table);
let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d));
- Ok(Some(try!(v.to_resolve(pkg, config))))
+ Ok(Some(try!(v.to_resolve(ws))))
}).chain_error(|| {
human(format!("failed to parse lock file at: {}", f.path().display()))
})
}
-pub fn write_pkg_lockfile(pkg: &Package,
- resolve: &Resolve,
- config: &Config) -> CargoResult<()> {
+pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> {
let mut e = Encoder::new();
resolve.encode(&mut e).unwrap();
None => {}
}
- let root = Filesystem::new(pkg.root().to_path_buf());
+ let root = Filesystem::new(ws.root().to_path_buf());
// Load the original lockfile if it exists.
//
// If the lockfile contents haven't changed so don't rewrite it. This is
// helpful on read-only filesystems.
- let orig = root.open_ro("Cargo.lock", config, "Cargo.lock file");
+ let orig = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file");
let orig = orig.and_then(|mut f| {
let mut s = String::new();
try!(f.read_to_string(&mut s));
}
// Ok, if that didn't work just write it out
- root.open_rw("Cargo.lock", config, "Cargo.lock file").and_then(|mut f| {
+ root.open_rw("Cargo.lock", ws.config(), "Cargo.lock file").and_then(|mut f| {
try!(f.file().set_len(0));
try!(f.write_all(out.as_bytes()));
Ok(())
}).chain_error(|| {
human(format!("failed to write {}",
- pkg.root().join("Cargo.lock").display()))
+ ws.root().join("Cargo.lock").display()))
})
}
pub use self::cargo_clean::{clean, CleanOptions};
-pub use self::cargo_compile::{compile, compile_pkg, resolve_dependencies, CompileOptions};
+pub use self::cargo_compile::{compile, compile_ws, resolve_dependencies, CompileOptions};
pub use self::cargo_compile::{CompileFilter, CompileMode};
pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, Unit};
pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts};
pub use self::cargo_fetch::{fetch, get_resolved_packages};
pub use self::cargo_pkgid::pkgid;
-pub use self::resolve::{resolve_pkg, resolve_with_previous};
+pub use self::resolve::{resolve_ws, resolve_with_previous};
pub use self::cargo_output_metadata::{output_metadata, OutputMetadataOptions, ExportInfo};
mod cargo_clean;
use std::env;
use std::fs::{self, File};
use std::iter::repeat;
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
use std::time::Duration;
use curl::easy::Easy;
use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET};
use core::source::Source;
-use core::{Package, SourceId};
+use core::{Package, SourceId, Workspace};
use core::dependency::Kind;
use core::manifest::ManifestMetadata;
use ops;
pub allow_dirty: bool,
}
-pub fn publish(manifest_path: &Path, opts: &PublishOpts) -> CargoResult<()> {
- let pkg = try!(Package::for_path(&manifest_path, opts.config));
+pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> {
+ let pkg = try!(ws.current());
if !pkg.publish() {
bail!("some crates cannot be published.\n\
// Prepare a tarball, with a non-surpressable warning if metadata
// is missing since this is being put online.
- let tarball = try!(ops::package(manifest_path, &ops::PackageOpts {
+ let tarball = try!(ops::package(ws, &ops::PackageOpts {
config: opts.config,
verify: opts.verify,
list: false,
use std::collections::{HashMap, HashSet};
-use core::{Package, PackageId, SourceId};
+use core::{PackageId, SourceId, Workspace};
use core::registry::PackageRegistry;
use core::resolver::{self, Resolve, Method};
use ops;
-use util::{CargoResult, Config};
+use util::CargoResult;
/// Resolve all dependencies for the specified `package` using the previous
/// lockfile as a guide if present.
///
/// This function will also write the result of resolution as a new
/// lockfile.
-pub fn resolve_pkg(registry: &mut PackageRegistry,
- package: &Package,
- config: &Config)
+pub fn resolve_ws(registry: &mut PackageRegistry, ws: &Workspace)
-> CargoResult<Resolve> {
- let prev = try!(ops::load_pkg_lockfile(package, config));
- let resolve = try!(resolve_with_previous(registry, package,
+ let prev = try!(ops::load_pkg_lockfile(ws));
+ let resolve = try!(resolve_with_previous(registry, ws,
Method::Everything,
prev.as_ref(), None));
- if package.package_id().source_id().is_path() {
- try!(ops::write_pkg_lockfile(package, &resolve, config));
+ if try!(ws.current()).package_id().source_id().is_path() {
+ try!(ops::write_pkg_lockfile(ws, &resolve));
}
Ok(resolve)
}
/// The previous resolve normally comes from a lockfile. This function does not
/// read or write lockfiles from the filesystem.
pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry,
- package: &Package,
+ ws: &Workspace,
method: Method,
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>)
-> CargoResult<Resolve> {
- try!(registry.add_sources(&[package.package_id().source_id()
- .clone()]));
-
// Here we place an artificial limitation that all non-registry sources
// cannot be locked at more than one revision. This means that if a git
// repository provides more than one package, they must all be updated in
.filter(|s| !s.is_registry()));
}
- let summary = package.summary().clone();
- let (summary, replace) = match previous {
- Some(r) => {
- // In the case where a previous instance of resolve is available, we
- // want to lock as many packages as possible to the previous version
- // without disturbing the graph structure. To this end we perform
- // two actions here:
- //
- // 1. We inform the package registry of all locked packages. This
- // involves informing it of both the locked package's id as well
- // as the versions of all locked dependencies. The registry will
- // then takes this information into account when it is queried.
- //
- // 2. The specified package's summary will have its dependencies
- // modified to their precise variants. This will instruct the
- // first step of the resolution process to not query for ranges
- // but rather for precise dependency versions.
- //
- // This process must handle altered dependencies, however, as
- // it's possible for a manifest to change over time to have
- // dependencies added, removed, or modified to different version
- // ranges. To deal with this, we only actually lock a dependency
- // to the previously resolved version if the dependency listed
- // still matches the locked version.
- for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
- let deps = r.deps_not_replaced(node)
- .filter(|p| keep(p, to_avoid, &to_avoid_sources))
- .cloned().collect();
- registry.register_lock(node.clone(), deps);
+ let mut summaries = Vec::new();
+ for member in ws.members() {
+ try!(registry.add_sources(&[member.package_id().source_id()
+ .clone()]));
+
+ // If we're resolving everything then we include all members of the
+ // workspace. If we want a specific set of requirements then we only
+ // resolve the main crate as it's the only one we're compiling. This
+ // case should only happen after we have a previous resolution, however,
+ // so assert that the previous exists.
+ let method = match method {
+ Method::Everything => Method::Everything,
+ Method::Required { .. } => {
+ assert!(previous.is_some());
+ if member.package_id() == try!(ws.current()).package_id() {
+ method
+ } else {
+ continue
+ }
}
+ };
- let summary = {
- let map = r.deps_not_replaced(r.root()).filter(|p| {
- keep(p, to_avoid, &to_avoid_sources)
- }).map(|d| {
- (d.name(), d)
- }).collect::<HashMap<_, _>>();
+ // If we don't have a previous instance of resolve then we just need to
+ // resolve our entire summary (method should be Everything) and we just
+ // move along to the next member.
+ let r = match previous {
+ Some(r) => r,
+ None => {
+ summaries.push((member.summary().clone(), method));
+ continue
+ }
+ };
- summary.map_dependencies(|dep| {
- match map.get(dep.name()) {
- Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
- _ => dep,
- }
- })
- };
- let replace = package.manifest().replace();
- let replace = replace.iter().map(|&(ref spec, ref dep)| {
+ // In the case where a previous instance of resolve is available, we
+ // want to lock as many packages as possible to the previous version
+ // without disturbing the graph structure. To this end we perform
+ // two actions here:
+ //
+ // 1. We inform the package registry of all locked packages. This
+ // involves informing it of both the locked package's id as well
+ // as the versions of all locked dependencies. The registry will
+ // then takes this information into account when it is queried.
+ //
+ // 2. The specified package's summary will have its dependencies
+ // modified to their precise variants. This will instruct the
+ // first step of the resolution process to not query for ranges
+ // but rather for precise dependency versions.
+ //
+ // This process must handle altered dependencies, however, as
+ // it's possible for a manifest to change over time to have
+ // dependencies added, removed, or modified to different version
+ // ranges. To deal with this, we only actually lock a dependency
+ // to the previously resolved version if the dependency listed
+ // still matches the locked version.
+ for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
+ let deps = r.deps_not_replaced(node)
+ .filter(|p| keep(p, to_avoid, &to_avoid_sources))
+ .cloned().collect();
+ registry.register_lock(node.clone(), deps);
+ }
+
+ let summary = {
+ let map = r.deps_not_replaced(member.package_id()).filter(|p| {
+ keep(p, to_avoid, &to_avoid_sources)
+ }).map(|d| {
+ (d.name(), d)
+ }).collect::<HashMap<_, _>>();
+
+ member.summary().clone().map_dependencies(|dep| {
+ match map.get(dep.name()) {
+ Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
+ _ => dep,
+ }
+ })
+ };
+ summaries.push((summary, method));
+ }
+
+ let root_replace = ws.root_replace();
+
+ let replace = match previous {
+ Some(r) => {
+ root_replace.iter().map(|&(ref spec, ref dep)| {
for (key, val) in r.replacements().iter() {
if spec.matches(key) &&
dep.matches_id(val) &&
}
}
(spec.clone(), dep.clone())
- }).collect::<Vec<_>>();
- (summary, replace)
+ }).collect::<Vec<_>>()
}
- None => (summary, package.manifest().replace().to_owned()),
+ None => root_replace.to_vec(),
};
- let mut resolved = try!(resolver::resolve(&summary, &method, &replace,
+ let mut resolved = try!(resolver::resolve(try!(ws.current()).package_id(),
+ &summaries,
+ &replace,
registry));
if let Some(previous) = previous {
resolved.copy_metadata(previous);
let mut url = url.clone();
// Strip a trailing slash
- url.path_segments_mut().unwrap().pop_if_empty();
+ if url.path().ends_with("/") {
+ url.path_segments_mut().unwrap().pop_if_empty();
+ }
// HACKHACK: For github URL's specifically just lowercase
// everything. GitHub treats both the same, but they hash
use rustc_serialize::{Encodable,Encoder};
use toml;
use core::shell::{Verbosity, ColorConfig};
-use core::{MultiShell, Package};
+use core::{MultiShell, Workspace};
use util::{CargoResult, CargoError, ChainError, Rustc, internal, human};
use util::Filesystem;
pub fn cwd(&self) -> &Path { &self.cwd }
- pub fn target_dir(&self, pkg: &Package) -> Filesystem {
+ // Build-output (`target`) directory for a workspace: an explicitly
+ // configured directory wins, otherwise `target` under the workspace root.
+ pub fn target_dir(&self, ws: &Workspace) -> Filesystem {
self.target_dir.borrow().clone().unwrap_or_else(|| {
- Filesystem::new(pkg.root().join("target"))
+ Filesystem::new(ws.root().join("target"))
})
}
use semver::{self, VersionReq};
use rustc_serialize::{Decodable, Decoder};
-use core::{SourceId, Profiles, PackageIdSpec};
-use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId,
- GitReference};
+use core::{SourceId, Profiles, PackageIdSpec, GitReference, WorkspaceConfig};
+use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId};
+use core::{EitherManifest, VirtualManifest};
use core::dependency::{Kind, Platform};
use core::manifest::{LibKind, Profile, ManifestMetadata};
use core::package_id::Metadata;
}
}
-pub fn to_manifest(contents: &[u8],
+pub fn to_manifest(contents: &str,
source_id: &SourceId,
layout: Layout,
config: &Config)
- -> CargoResult<(Manifest, Vec<PathBuf>)> {
+ -> CargoResult<(EitherManifest, Vec<PathBuf>)> {
let manifest = layout.root.join("Cargo.toml");
let manifest = match util::without_prefix(&manifest, config.cwd()) {
Some(path) => path.to_path_buf(),
None => manifest.clone(),
};
- let contents = try!(str::from_utf8(contents).map_err(|_| {
- human(format!("{} is not valid UTF-8", manifest.display()))
- }));
let root = try!(parse(contents, &manifest, config));
let mut d = toml::Decoder::new(toml::Value::Table(root));
let manifest: TomlManifest = try!(Decodable::decode(&mut d).map_err(|e| {
human(e.to_string())
}));
- let pair = try!(manifest.to_manifest(source_id, &layout, config));
- let (mut manifest, paths) = pair;
- match d.toml {
- Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()),
- None => {}
- }
- if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
- bail!("no targets specified in the manifest\n \
- either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] \
- section must be present")
- }
- return Ok((manifest, paths));
+ return match manifest.to_real_manifest(source_id, &layout, config) {
+ Ok((mut manifest, paths)) => {
+ if let Some(ref toml) = d.toml {
+ add_unused_keys(&mut manifest, toml, String::new());
+ }
+ if !manifest.targets().iter().any(|t| !t.is_custom_build()) {
+ bail!("no targets specified in the manifest\n \
+ either src/lib.rs, src/main.rs, a [lib] section, or \
+ [[bin]] section must be present")
+ }
+ Ok((EitherManifest::Real(manifest), paths))
+ }
+ Err(e) => {
+ match manifest.to_virtual_manifest(source_id, &layout, config) {
+ Ok((m, paths)) => Ok((EitherManifest::Virtual(m), paths)),
+ Err(..) => Err(e),
+ }
+ }
+ };
fn add_unused_keys(m: &mut Manifest, toml: &toml::Value, key: String) {
if key == "package.metadata" {
features: Option<HashMap<String, Vec<String>>>,
target: Option<HashMap<String, TomlPlatform>>,
replace: Option<HashMap<String, TomlDependency>>,
+ workspace: Option<TomlWorkspace>,
}
#[derive(RustcDecodable, Clone, Default)]
exclude: Option<Vec<String>>,
include: Option<Vec<String>>,
publish: Option<bool>,
+ workspace: Option<String>,
// package metadata
description: Option<String>,
repository: Option<String>,
}
+// Decoded form of a manifest's `[workspace]` table. `members` optionally
+// lists relative paths of member crates; when absent, members are inferred
+// from path dependencies (see the workspace documentation in this change).
+#[derive(RustcDecodable)]
+pub struct TomlWorkspace {
+ members: Option<Vec<String>>,
+}
+
pub struct TomlVersion {
version: semver::Version,
}
}
impl TomlManifest {
- pub fn to_manifest(&self, source_id: &SourceId, layout: &Layout,
- config: &Config)
- -> CargoResult<(Manifest, Vec<PathBuf>)> {
+ fn to_real_manifest(&self,
+ source_id: &SourceId,
+ layout: &Layout,
+ config: &Config)
+ -> CargoResult<(Manifest, Vec<PathBuf>)> {
let mut nested_paths = vec![];
let mut warnings = vec![];
}
let mut deps = Vec::new();
- let mut replace = Vec::new();
+ let replace;
{
}
}
- if let Some(ref map) = self.replace {
- for (spec, replacement) in map {
- let spec = try!(PackageIdSpec::parse(spec));
-
- let version_specified = match *replacement {
- TomlDependency::Detailed(ref d) => d.version.is_some(),
- TomlDependency::Simple(..) => true,
- };
- if version_specified {
- bail!("replacements cannot specify a version \
- requirement, but found one for `{}`", spec);
- }
-
- let dep = try!(replacement.to_dependency(spec.name(),
- &mut cx,
- None));
- let dep = {
- let version = try!(spec.version().chain_error(|| {
- human(format!("replacements must specify a version \
- to replace, but `{}` does not",
- spec))
- }));
- let req = VersionReq::exact(version);
- dep.clone_inner().set_version_req(req)
- .into_dependency()
- };
- replace.push((spec, dep));
- }
- }
+ replace = try!(self.replace(&mut cx));
}
{
repository: project.repository.clone(),
keywords: project.keywords.clone().unwrap_or(Vec::new()),
};
+
+ let workspace_config = match (self.workspace.as_ref(),
+ project.workspace.as_ref()) {
+ (Some(config), None) => {
+ WorkspaceConfig::Root { members: config.members.clone() }
+ }
+ (None, root) => {
+ WorkspaceConfig::Member { root: root.cloned() }
+ }
+ (Some(..), Some(..)) => {
+ bail!("cannot configure both `package.workspace` and \
+ `[workspace]`, only one can be specified")
+ }
+ };
let profiles = build_profiles(&self.profile);
let publish = project.publish.unwrap_or(true);
let mut manifest = Manifest::new(summary,
metadata,
profiles,
publish,
- replace);
+ replace,
+ workspace_config);
if project.license_file.is_some() && project.license.is_some() {
manifest.add_warning(format!("only one of `license` or \
`license-file` is necessary"));
Ok((manifest, nested_paths))
}
+
+ // Interprets this TOML document as a *virtual* manifest: one that contains
+ // a `[workspace]` table (and optionally `[replace]`) but does not define a
+ // package or any targets of its own.
+ fn to_virtual_manifest(&self,
+ source_id: &SourceId,
+ layout: &Layout,
+ config: &Config)
+ -> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
+ // A virtual manifest must not declare any package-level sections;
+ // reject each with a targeted error message.
+ if self.project.is_some() {
+ bail!("virtual manifests do not define [project]");
+ }
+ if self.package.is_some() {
+ bail!("virtual manifests do not define [package]");
+ }
+ if self.lib.is_some() {
+ bail!("virtual manifests do not specify [lib]");
+ }
+ if self.bin.is_some() {
+ bail!("virtual manifests do not specify [[bin]]");
+ }
+ if self.example.is_some() {
+ bail!("virtual manifests do not specify [[example]]");
+ }
+ if self.test.is_some() {
+ bail!("virtual manifests do not specify [[test]]");
+ }
+ if self.bench.is_some() {
+ bail!("virtual manifests do not specify [[bench]]");
+ }
+
+ // Only `[replace]` is honored; `deps`/`warnings`/`nested_paths` exist
+ // solely to satisfy the `Context` that `replace()` requires.
+ let mut nested_paths = Vec::new();
+ let mut warnings = Vec::new();
+ let mut deps = Vec::new();
+ let replace = try!(self.replace(&mut Context {
+ deps: &mut deps,
+ source_id: source_id,
+ nested_paths: &mut nested_paths,
+ config: config,
+ warnings: &mut warnings,
+ platform: None,
+ layout: layout,
+ }));
+ // Without a `[workspace]` table there is nothing for a virtual
+ // manifest to describe, so its absence is an error.
+ let workspace_config = match self.workspace {
+ Some(ref config) => {
+ WorkspaceConfig::Root { members: config.members.clone() }
+ }
+ None => {
+ bail!("virtual manifests must be configured with [workspace]");
+ }
+ };
+ Ok((VirtualManifest::new(replace, workspace_config), nested_paths))
+ }
+
+ // Converts the optional `[replace]` table into `(PackageIdSpec,
+ // Dependency)` pairs. Replacements may not carry their own version
+ // requirement; instead the spec being replaced must name an exact
+ // version, which is locked onto the replacement dependency.
+ fn replace(&self, cx: &mut Context)
+ -> CargoResult<Vec<(PackageIdSpec, Dependency)>> {
+ let map = match self.replace {
+ Some(ref map) => map,
+ None => return Ok(Vec::new()),
+ };
+
+ let mut replace = Vec::new();
+ for (spec, replacement) in map {
+ let spec = try!(PackageIdSpec::parse(spec));
+
+ // A bare string dependency is always a version requirement, so
+ // the simple form is unconditionally rejected below.
+ let version_specified = match *replacement {
+ TomlDependency::Detailed(ref d) => d.version.is_some(),
+ TomlDependency::Simple(..) => true,
+ };
+ if version_specified {
+ bail!("replacements cannot specify a version \
+ requirement, but found one for `{}`", spec);
+ }
+
+ let dep = try!(replacement.to_dependency(spec.name(), cx, None));
+ let dep = {
+ // The spec must pin the exact version being replaced; that
+ // version becomes the replacement's exact requirement.
+ let version = try!(spec.version().chain_error(|| {
+ human(format!("replacements must specify a version \
+ to replace, but `{}` does not",
+ spec))
+ }));
+ let req = VersionReq::exact(version);
+ dep.clone_inner().set_version_req(req)
+ .into_dependency()
+ };
+ replace.push((spec, dep));
+ }
+ Ok(replace)
+ }
}
/// Will check a list of toml targets, and make sure the target names are unique within a vector.
publish = false
```
+## The `workspace` Field (optional)
+
+The `workspace` field can be used to configure the workspace that this package
+will be a member of. If not specified, this is inferred to be the first
+Cargo.toml with `[workspace]` upwards in the filesystem.
+
+```toml
+[package]
+# ...
+workspace = "path/to/root"
+```
+
+For more information, see the documentation for the workspace table below.
+
## Package Metadata
There are a number of optional metadata fields also accepted under the
high-level packages that are designed for curation. If a feature is optional, it
can almost certainly be expressed as a separate package.
+# The `[workspace]` Section
+
+Projects can define a workspace which is a set of crates that will all share the
+same `Cargo.lock` and output directory. The `[workspace]` table can be defined
+as:
+
+```toml
+[workspace]
+
+# Optional key, inferred if not present
+members = ["path/to/member1", "path/to/member2"]
+```
+
+Workspaces were added to Cargo as part of [RFC 1525] and have a number of
+properties:
+
+* A workspace can contain multiple crates where one of them is the root crate.
+* The root crate's `Cargo.toml` contains the `[workspace]` table, but is not
+ required to have other configuration.
+* Whenever any crate in the workspace is compiled, output is placed next to the
+ root crate's `Cargo.toml`.
+* The lock file for all crates in the workspace resides next to the root crate's
+ `Cargo.toml`.
+* The `[replace]` section in `Cargo.toml` is only recognized at the workspace
+ root crate, it's ignored in member crates' manifests.
+
+[RFC 1525]: https://github.com/rust-lang/rfcs/blob/master/text/1525-cargo-workspace.md
+
+The root crate of a workspace, indicated by the presence of `[workspace]` in
+its manifest, is responsible for defining the entire workspace (listing all
+members). This can be done through the `members` key, and if it is omitted then
+members are implicitly included through all `path` dependencies. Note that
+members of the workspaces listed explicitly will also have their path
+dependencies included in the workspace.
+
+The `package.workspace` manifest key (described above) is used in member crates
+to point at a workspace's root crate. If this key is omitted then it is inferred
+to be the first crate whose manifest contains `[workspace]` upwards in the
+filesystem.
+
+A crate may either specify `package.workspace` or specify `[workspace]`. That
+is, a crate cannot both be a root crate in a workspace (contain `[workspace]`)
+and also be a member crate of another workspace (contain `package.workspace`).
+
+Most of the time workspaces will not need to be dealt with as `cargo new` and
+`cargo init` will handle workspace configuration automatically.
+
# The Project Layout
If your project is an executable, name the main source file `src/main.rs`. If it
impl<'a> ham::Matcher<&'a mut ProcessBuilder> for Execs {
fn matches(&self, process: &'a mut ProcessBuilder) -> ham::MatchResult {
+ println!("running {}", process);
let res = process.exec_with_output();
match res {
extern crate syntax;
use rustc_plugin::Registry;
- use syntax::ast::TokenTree;
+ use syntax::tokenstream::TokenTree;
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
extern crate baz;
use rustc_plugin::Registry;
- use syntax::ast::TokenTree;
+ use syntax::tokenstream::TokenTree;
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, MacEager, MacResult};
+// Test helper: resolves `deps` for the root package `pkg` against `registry`
+// and returns the flattened list of resolved package ids.
fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
registry: &mut R)
-> CargoResult<Vec<PackageId>> {
- let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
+ let summary = Summary::new(pkg.clone(), deps, HashMap::new()).unwrap();
let method = Method::Everything;
- Ok(try!(resolver::resolve(&summary, &method, &[], registry)).iter().map(|p| {
+ Ok(try!(resolver::resolve(&pkg,
+ &[(summary, method)],
+ &[],
+ registry)).iter().map(|p| {
p.clone()
}).collect())
}
--- /dev/null
+extern crate cargotest;
+extern crate hamcrest;
+
+use cargotest::support::{project, execs};
+use cargotest::support::registry::Package;
+use hamcrest::{assert_that, existing_file, existing_dir, is_not};
+
+// Explicit config both ways: root lists `bar` in workspace members and bar
+// points back via `workspace = ".."`; both share the root Cargo.lock.
+#[test]
+fn simple_explicit() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#)
+ .file("bar/src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), is_not(existing_file()));
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+
+ assert_that(&p.root().join("Cargo.lock"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+// Member omits `package.workspace`; the root is inferred by walking up to
+// the first manifest containing `[workspace]`.
+#[test]
+fn inferred_root() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), is_not(existing_file()));
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+
+ assert_that(&p.root().join("Cargo.lock"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+// `[workspace]` with no explicit members: path dependencies of the root are
+// implicitly pulled into the workspace.
+#[test]
+fn inferred_path_dep() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "");
+ p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), is_not(existing_file()));
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+
+ assert_that(&p.root().join("Cargo.lock"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+// Path dependencies of inferred members (foo -> bar -> baz) are themselves
+// workspace members, transitively sharing the root lock file.
+#[test]
+fn transitive_path_dep() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "bar" }
+
+ [workspace]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ baz = { path = "../baz" }
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "")
+ .file("baz/Cargo.toml", r#"
+ [project]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("baz/src/main.rs", "fn main() {}")
+ .file("baz/src/lib.rs", "");
+ p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), is_not(existing_file()));
+ assert_that(&p.bin("baz"), is_not(existing_file()));
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+ assert_that(&p.bin("baz"), is_not(existing_file()));
+
+ assert_that(p.cargo("build").cwd(p.root().join("baz")),
+ execs().with_status(0));
+ assert_that(&p.bin("foo"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+ assert_that(&p.bin("baz"), existing_file());
+
+ assert_that(&p.root().join("Cargo.lock"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+ assert_that(&p.root().join("baz/Cargo.lock"), is_not(existing_file()));
+}
+
+// `package.workspace` may point sideways (to a sibling directory), not just
+// upwards; the member still uses the root's lock file.
+#[test]
+fn parent_pointer_works() {
+ let p = project("foo")
+ .file("foo/Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ bar = { path = "../bar" }
+
+ [workspace]
+ "#)
+ .file("foo/src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../foo"
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("bar/src/lib.rs", "");
+ p.build();
+
+ assert_that(p.cargo("build").cwd(p.root().join("foo")),
+ execs().with_status(0));
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.root().join("foo/Cargo.lock"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+// Two members with the same package name must be rejected with an error
+// naming both manifests.
+#[test]
+fn same_names_in_workspace() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: two packages named `foo` in this workspace:
+- [..]Cargo.toml
+- [..]Cargo.toml
+"));
+}
+
+// A crate that infers a workspace root which does not list it back as a
+// member gets a diagnostic explaining the mismatch.
+#[test]
+fn parent_doesnt_point_to_child() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(101)
+ .with_stderr("\
+error: current package believes it's in a workspace when it's not:
+current: [..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable [..]
+"));
+}
+
+// `package.workspace` pointing at a nonexistent manifest is a read error.
+#[test]
+fn invalid_parent_pointer() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "foo"
+ "#)
+ .file("src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+ [..]
+"));
+}
+
+// A `workspace.members` entry with no manifest on disk is a read error.
+#[test]
+fn invalid_members() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["foo"]
+ "#)
+ .file("src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: failed to read `[..]Cargo.toml`
+
+Caused by:
+ [..]
+"));
+}
+
+// An empty `[workspace]` table on a lone package is valid.
+#[test]
+fn bare_workspace_ok() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#)
+ .file("src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"), execs().with_status(0));
+}
+
+// Two manifests each declaring `[workspace]` and listing each other is an
+// error: a workspace has exactly one root.
+#[test]
+fn two_roots() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = [".."]
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: multiple workspace roots found in the same workspace:
+ [..]
+ [..]
+"));
+}
+
+// `package.workspace` pointing at a manifest without `[workspace]` is an
+// error: the pointed-to crate isn't a root.
+#[test]
+fn workspace_isnt_root() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "bar"
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: root of a workspace inferred but wasn't a root: [..]
+"));
+}
+
+// A listed member whose own `package.workspace` points at a *different*
+// workspace is rejected ("member of the wrong workspace").
+#[test]
+fn dangling_member() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = "../baz"
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ .file("baz/Cargo.toml", r#"
+ [project]
+ name = "baz"
+ version = "0.1.0"
+ authors = []
+ workspace = "../baz"
+ "#)
+ .file("baz/src/main.rs", "fn main() {}");
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: package `[..]` is a member of the wrong workspace
+expected: [..]
+actual: [..]
+"));
+}
+
+// Mutual `package.workspace` pointers (foo -> bar -> foo) must fail rather
+// than loop forever; only the exit status is asserted.
+#[test]
+fn cycle() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+ workspace = "bar"
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(101));
+}
+
+// Members resolve against one shared lock file: foo's `dep1 = "0.1"` is
+// constrained by bar's `< 0.1.5`, so 0.1.3 is selected rather than 0.1.8.
+#[test]
+fn share_dependencies() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "0.1"
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "< 0.1.5"
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ Package::new("dep1", "0.1.3").publish();
+ Package::new("dep1", "0.1.8").publish();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(0)
+ .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] dep1 v0.1.3 ([..])
+[COMPILING] dep1 v0.1.3 ([..])
+[COMPILING] foo v0.1.0 ([..])
+"));
+}
+
+// `cargo fetch` at the root downloads dependencies of every member, even
+// ones the root package itself does not depend on.
+#[test]
+fn fetch_fetches_all() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "*"
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ Package::new("dep1", "0.1.3").publish();
+
+ assert_that(p.cargo("fetch"),
+ execs().with_status(0)
+ .with_stderr("\
+[UPDATING] registry `[..]`
+[DOWNLOADING] dep1 v0.1.3 ([..])
+"));
+}
+
+// The shared lock file pins versions for every member: after newer releases
+// are published, both foo and bar still build the originally-locked 0.1.0s.
+#[test]
+fn lock_works_for_everyone() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep2 = "0.1"
+
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "fn main() {}")
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ dep1 = "0.1"
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+
+ Package::new("dep1", "0.1.0").publish();
+ Package::new("dep2", "0.1.0").publish();
+
+ assert_that(p.cargo("generate-lockfile"),
+ execs().with_status(0)
+ .with_stderr("\
+[UPDATING] registry `[..]`
+"));
+
+ Package::new("dep1", "0.1.1").publish();
+ Package::new("dep2", "0.1.1").publish();
+
+ assert_that(p.cargo("build"),
+ execs().with_status(0)
+ .with_stderr("\
+[DOWNLOADING] dep2 v0.1.0 ([..])
+[COMPILING] dep2 v0.1.0 ([..])
+[COMPILING] foo v0.1.0 ([..])
+"));
+
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0)
+ .with_stderr("\
+[DOWNLOADING] dep1 v0.1.0 ([..])
+[COMPILING] dep1 v0.1.0 ([..])
+[COMPILING] bar v0.1.0 ([..])
+"));
+}
+
+// A virtual root (only `[workspace]`, no package) still hosts the shared
+// lock file and output for its members.
+#[test]
+fn virtual_works() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(0));
+ assert_that(&p.root().join("Cargo.lock"), existing_file());
+ assert_that(&p.bin("bar"), existing_file());
+ assert_that(&p.root().join("bar/Cargo.lock"), is_not(existing_file()));
+}
+
+// A virtual root with no members: a crate beneath it infers that root but is
+// not listed, producing the "believes it's in a workspace" diagnostic with a
+// hint to add it to `workspace.members`.
+#[test]
+fn virtual_misconfigure() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [workspace]
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+ assert_that(p.cargo("build").cwd(p.root().join("bar")),
+ execs().with_status(101)
+ .with_stderr("\
+error: current package believes it's in a workspace when it's not:
+current: [..]bar[..]Cargo.toml
+workspace: [..]Cargo.toml
+
+this may be fixable by adding `bar` to the `workspace.members` array of the \
+manifest located at: [..]
+"));
+}
+
+// `cargo build` invoked directly on a virtual manifest fails: the command
+// needs an actual package to operate on.
+#[test]
+fn virtual_build() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [workspace]
+ "#)
+ .file("bar/Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/main.rs", "fn main() {}")
+ p.build();
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: manifest path `[..]` is a virtual manifest, but this command \
+requires running against an actual package in this workspace
+"));
+}
+
+// Listing a virtual manifest as a member creates a second `[workspace]`
+// root, which is rejected as multiple roots.
+#[test]
+fn include_virtual() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ [workspace]
+ members = ["bar"]
+ "#)
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", r#"
+ [workspace]
+ "#);
+ p.build();
+ assert_that(p.cargo("build"),
+ execs().with_status(101)
+ .with_stderr("\
+error: multiple workspace roots found in the same workspace:
+ [..]
+ [..]
+"));
+}
+
+// Explicit members also pull in their own path dependencies (p1 brings p2),
+// alongside the root's path deps (p3); all build output lands at the root.
+#[test]
+fn members_include_path_deps() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ members = ["p1"]
+
+ [dependencies]
+ p3 = { path = "p3" }
+ "#)
+ .file("src/lib.rs", "")
+ .file("p1/Cargo.toml", r#"
+ [project]
+ name = "p1"
+ version = "0.1.0"
+ authors = []
+
+ [dependencies]
+ p2 = { path = "../p2" }
+ "#)
+ .file("p1/src/lib.rs", "")
+ .file("p2/Cargo.toml", r#"
+ [project]
+ name = "p2"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("p2/src/lib.rs", "")
+ .file("p3/Cargo.toml", r#"
+ [project]
+ name = "p3"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("p3/src/lib.rs", "");
+ p.build();
+
+ assert_that(p.cargo("build").cwd(p.root().join("p1")),
+ execs().with_status(0));
+ assert_that(p.cargo("build").cwd(p.root().join("p2")),
+ execs().with_status(0));
+ assert_that(p.cargo("build").cwd(p.root().join("p3")),
+ execs().with_status(0));
+ assert_that(p.cargo("build"),
+ execs().with_status(0));
+
+ assert_that(&p.root().join("target"), existing_dir());
+ assert_that(&p.root().join("p1/target"), is_not(existing_dir()));
+ assert_that(&p.root().join("p2/target"), is_not(existing_dir()));
+ assert_that(&p.root().join("p3/target"), is_not(existing_dir()));
+}
+
+// `cargo new` inside a workspace that won't list the new crate still
+// succeeds, but warns that the resulting configuration is invalid.
+#[test]
+fn new_warns_you_this_will_not_work() {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.1.0"
+ authors = []
+
+ [workspace]
+ "#)
+ .file("src/lib.rs", "");
+ p.build();
+
+ assert_that(p.cargo("new").arg("bar").env("USER", "foo"),
+ execs().with_status(0)
+ .with_stderr("\
+warning: compiling this new crate may not work due to invalid workspace \
+configuration
+
+current package believes it's in a workspace when it's not:
+current: [..]
+workspace: [..]
+
+this may be fixable by ensuring that this crate is depended on by the workspace \
+root: [..]
+"));
+}